Merge "Fix matching device address using audio attributes."
diff --git a/MainlineFiles.cfg b/MainlineFiles.cfg
index 4e1d3a2..bf39c1a 100644
--- a/MainlineFiles.cfg
+++ b/MainlineFiles.cfg
@@ -30,6 +30,7 @@
media/codec2/hidl/
media/codec2/sfplugin/utils/
media/codec2/vndk/
+media/libstagefright/data/media_codecs_sw.xml
media/module/
services/mediacodec/
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index d1aa36a..0706ac1 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -130,6 +130,12 @@
return err;
}
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
+ if ((err = parcel->readInt32(&colorSpace)) != OK) {
+ ALOGE("%s: Failed to read color space from parcel", __FUNCTION__);
+ return err;
+ }
+
mWidth = width;
mHeight = height;
mFormat = format;
@@ -146,6 +152,7 @@
mHistogramCounts = std::move(histogramCounts);
mDynamicRangeProfile = dynamicRangeProfile;
mStreamUseCase = streamUseCase;
+ mColorSpace = colorSpace;
return OK;
}
@@ -238,6 +245,11 @@
return err;
}
+ if ((err = parcel->writeInt32(mColorSpace)) != OK) {
+ ALOGE("%s: Failed to write color space", __FUNCTION__);
+ return err;
+ }
+
return OK;
}
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 11d4960..d50566d 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -26,8 +26,8 @@
#include <system/camera_metadata.h>
#include <utils/String8.h>
-namespace android {
+namespace android {
const int OutputConfiguration::INVALID_ROTATION = -1;
const int OutputConfiguration::INVALID_SET_ID = -1;
@@ -81,6 +81,10 @@
return mDynamicRangeProfile;
}
+int32_t OutputConfiguration::getColorSpace() const {
+ return mColorSpace;
+}
+
int64_t OutputConfiguration::getStreamUseCase() const {
return mStreamUseCase;
}
@@ -103,6 +107,7 @@
mIsShared(false),
mIsMultiResolution(false),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO) {
@@ -191,6 +196,11 @@
ALOGE("%s: Failed to read surface dynamic range profile flag from parcel", __FUNCTION__);
return err;
}
+ int32_t colorSpace;
+ if ((err = parcel->readInt32(&colorSpace)) != OK) {
+ ALOGE("%s: Failed to read surface color space flag from parcel", __FUNCTION__);
+ return err;
+ }
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
if ((err = parcel->readInt64(&streamUseCase)) != OK) {
@@ -230,6 +240,7 @@
mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
mDynamicRangeProfile = dynamicProfile;
+ mColorSpace = colorSpace;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
" physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
@@ -252,6 +263,7 @@
mPhysicalCameraId = physicalId;
mIsMultiResolution = false;
mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ mColorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
mTimestampBase = TIMESTAMP_BASE_DEFAULT;
mMirrorMode = MIRROR_MODE_AUTO;
@@ -265,6 +277,7 @@
mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
mTimestampBase(TIMESTAMP_BASE_DEFAULT),
mMirrorMode(MIRROR_MODE_AUTO) { }
@@ -317,6 +330,9 @@
err = parcel->writeInt64(mDynamicRangeProfile);
if (err != OK) return err;
+ err = parcel->writeInt32(mColorSpace);
+ if (err != OK) return err;
+
err = parcel->writeInt64(mStreamUseCase);
if (err != OK) return err;
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 094a3c1..27ebb7a 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -43,7 +43,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index aaa88b2..90ee924 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -67,22 +67,26 @@
int64_t mDynamicRangeProfile;
// Stream use case
int64_t mStreamUseCase;
+ // Color space
+ int32_t mColorSpace;
CameraStreamStats() :
mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
- mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+ mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
- int streamUseCase)
+ int streamUseCase, int32_t colorSpace)
: mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
mDynamicRangeProfile(dynamicRangeProfile),
- mStreamUseCase(streamUseCase) {}
+ mStreamUseCase(streamUseCase),
+ mColorSpace(colorSpace) {}
virtual status_t readFromParcel(const android::Parcel* parcel) override;
virtual status_t writeToParcel(android::Parcel* parcel) const override;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index b7c7f7f..a713b40 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -61,6 +61,7 @@
int getWidth() const;
int getHeight() const;
int64_t getDynamicRangeProfile() const;
+ int32_t getColorSpace() const;
bool isDeferred() const;
bool isShared() const;
String16 getPhysicalCameraId() const;
@@ -111,6 +112,7 @@
mIsMultiResolution == other.mIsMultiResolution &&
sensorPixelModesUsedEqual(other) &&
mDynamicRangeProfile == other.mDynamicRangeProfile &&
+ mColorSpace == other.mColorSpace &&
mStreamUseCase == other.mStreamUseCase &&
mTimestampBase == other.mTimestampBase &&
mMirrorMode == other.mMirrorMode);
@@ -153,6 +155,9 @@
if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
return mDynamicRangeProfile < other.mDynamicRangeProfile;
}
+ if (mColorSpace != other.mColorSpace) {
+ return mColorSpace < other.mColorSpace;
+ }
if (mStreamUseCase != other.mStreamUseCase) {
return mStreamUseCase < other.mStreamUseCase;
}
@@ -187,6 +192,7 @@
bool mIsMultiResolution;
std::vector<int32_t> mSensorPixelModesUsed;
int64_t mDynamicRangeProfile;
+ int32_t mColorSpace;
int64_t mStreamUseCase;
int mTimestampBase;
int mMirrorMode;
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 05124c0..1018b41 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -537,6 +537,7 @@
case ACAMERA_CONTROL_ENABLE_ZSL:
case ACAMERA_CONTROL_EXTENDED_SCENE_MODE:
case ACAMERA_CONTROL_ZOOM_RATIO:
+ case ACAMERA_CONTROL_SETTINGS_OVERRIDE:
case ACAMERA_EDGE_MODE:
case ACAMERA_FLASH_MODE:
case ACAMERA_HOT_PIXEL_MODE:
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index 729182e..7388678 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -209,7 +209,8 @@
* Query the capabilities of a camera device. These capabilities are
* immutable for a given camera.
*
- * <p>See {@link ACameraMetadata} document and {@link NdkCameraMetadataTags.h} for more details.</p>
+ * <p>See {@link ACameraMetadata} document and <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details.</p>
*
* <p>The caller must call {@link ACameraMetadata_free} to free the memory of the output
* characteristics.</p>
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index b331d50..a9f53dd 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -96,9 +96,12 @@
/**
* The tag identifying the entry.
*
- * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+ * <p> It is one of the values defined in
+ * <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * , and defines how the
* entry should be interpreted and which parts of the API provide it.
- * See {@link NdkCameraMetadataTags.h} for more details. </p>
+ * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details. </p>
*/
uint32_t tag;
@@ -141,9 +144,11 @@
/**
* The tag identifying the entry.
*
- * <p> It is one of the values defined in {@link NdkCameraMetadataTags.h}, and defines how the
+ * <p> It is one of the values defined in <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>,
+ * and defines how the
* entry should be interpreted and which parts of the API provide it.
- * See {@link NdkCameraMetadataTags.h} for more details. </p>
+ * See <a href="https://cs.android.com/android/platform/superproject/+/master:frameworks/av/camera/ndk/include/camera/NdkCameraMetadataTags.h">NdkCameraMetadataTags.h</a>
+ * for more details. </p>
*/
uint32_t tag;
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 0d156a5..def883b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2044,6 +2044,105 @@
*/
ACAMERA_CONTROL_ZOOM_RATIO = // float
ACAMERA_CONTROL_START + 47,
+ /**
+ * <p>The desired CaptureRequest settings override with which certain keys are
+ * applied earlier so that they can take effect sooner.</p>
+ *
+ * <p>Type: int32 (acamera_metadata_enum_android_control_settings_override_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
+ * <li>ACaptureRequest</li>
+ * </ul></p>
+ *
+ * <p>There are some CaptureRequest keys which can be applied earlier than others
+ * when controls within a CaptureRequest aren't required to take effect at the same time.
+ * One such example is zoom. Zoom can be applied at a later stage of the camera pipeline.
+ * As soon as the camera device receives the CaptureRequest, it can apply the requested
+ * zoom value onto an earlier request that's already in the pipeline, thus improving zoom
+ * latency.</p>
+ * <p>This key's value in the capture result reflects whether the controls for this capture
+ * are overridden "by" a newer request. This means that if a capture request turns on
+ * settings override, the capture result of an earlier request will contain the key value
+ * of ZOOM. On the other hand, if a capture request has settings override turned on,
+ * but all newer requests have it turned off, the key's value in the capture result will
+ * be OFF because this capture isn't overridden by a newer capture. In the two examples
+ * below, the capture results columns illustrate the settingsOverride values in different
+ * scenarios.</p>
+ * <p>Assuming the zoom settings override can speed up by 1 frame, below example illustrates
+ * the speed-up at the start of capture session:</p>
+ * <pre><code>Camera session created
+ * Request 1 (zoom=1.0x, override=ZOOM) ->
+ * Request 2 (zoom=1.2x, override=ZOOM) ->
+ * Request 3 (zoom=1.4x, override=ZOOM) -> Result 1 (zoom=1.2x, override=ZOOM)
+ * Request 4 (zoom=1.6x, override=ZOOM) -> Result 2 (zoom=1.4x, override=ZOOM)
+ * Request 5 (zoom=1.8x, override=ZOOM) -> Result 3 (zoom=1.6x, override=ZOOM)
+ * -> Result 4 (zoom=1.8x, override=ZOOM)
+ * -> Result 5 (zoom=1.8x, override=OFF)
+ * </code></pre>
+ * <p>The application can turn on settings override and use zoom as normal. The example
+ * shows that the later zoom values (1.2x, 1.4x, 1.6x, and 1.8x) overwrite the zoom
+ * values (1.0x, 1.2x, 1.4x, and 1.6x) of earlier requests (#1, #2, #3, and #4).</p>
+ * <p>The application must make sure the settings override doesn't interfere with user
+ * journeys requiring simultaneous application of all controls in CaptureRequest on the
+ * requested output targets. For example, if the application takes a still capture using
+ * CameraCaptureSession#capture, and the repeating request immediately sets a different
+ * zoom value using override, the inflight still capture could have its zoom value
+ * overwritten unexpectedly.</p>
+ * <p>So the application is strongly recommended to turn off settingsOverride when taking
+ * still/burst captures, and turn it back on when there is only repeating viewfinder
+ * request and no inflight still/burst captures.</p>
+ * <p>Below is the example demonstrating the transitions in and out of the
+ * settings override:</p>
+ * <pre><code>Request 1 (zoom=1.0x, override=OFF)
+ * Request 2 (zoom=1.2x, override=OFF)
+ * Request 3 (zoom=1.4x, override=ZOOM) -> Result 1 (zoom=1.0x, override=OFF)
+ * Request 4 (zoom=1.6x, override=ZOOM) -> Result 2 (zoom=1.4x, override=ZOOM)
+ * Request 5 (zoom=1.8x, override=OFF) -> Result 3 (zoom=1.6x, override=ZOOM)
+ * -> Result 4 (zoom=1.6x, override=OFF)
+ * -> Result 5 (zoom=1.8x, override=OFF)
+ * </code></pre>
+ * <p>This example shows that:</p>
+ * <ul>
+ * <li>The application "ramps in" settings override by setting the control to ZOOM.
+ * In the example, request #3 enables zoom settings override. Because the camera device
+ * can speed up applying zoom by 1 frame, the output of request #2 has 1.4x zoom, the
+ * value specified in request #3.</li>
+ * <li>The application "ramps out" of settings override by setting the control to OFF. In
+ * the example, request #5 changes the override to OFF. Because request #4's zoom
+ * takes effect in result #3, result #4's zoom remains the same until new value takes
+ * effect in result #5.</li>
+ * </ul>
+ */
+ ACAMERA_CONTROL_SETTINGS_OVERRIDE = // int32 (acamera_metadata_enum_android_control_settings_override_t)
+ ACAMERA_CONTROL_START + 49,
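
Editor's note: a minimal NDK sketch of how a client might enable the override described above on a repeating request. The helper name, the assumption that `request` is a valid ACaptureRequest* for the viewfinder, and the omission of error handling are all illustrative, not part of this change.

    // Hedged sketch: enable the zoom settings override so later zoom ratios can
    // be applied to frames already in the camera pipeline.
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    static void enableZoomOverride(ACaptureRequest* request, float zoomRatio) {
        const int32_t overrideMode = ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM;
        ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_SETTINGS_OVERRIDE,
                                     1 /*count*/, &overrideMode);
        ACaptureRequest_setEntry_float(request, ACAMERA_CONTROL_ZOOM_RATIO,
                                       1 /*count*/, &zoomRatio);
        // Per the documentation above, set the override back to OFF before
        // issuing still/burst captures.
    }
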
+ /**
+ * <p>List of available settings overrides supported by the camera device that can
+ * be used to speed up certain controls.</p>
+ *
+ * <p>Type: int32[n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>When not all controls within a CaptureRequest are required to take effect
+ * at the same time on the outputs, the camera device may apply certain request keys sooner
+ * to improve latency. This list contains such supported settings overrides. Each settings
+ * override corresponds to a set of CaptureRequest keys that can be applied sooner.</p>
+ * <p>A supported settings override can be passed in via
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html#CONTROL_SETTINGS_OVERRIDE">CaptureRequest#CONTROL_SETTINGS_OVERRIDE</a>, and the
+ * CaptureRequest keys corresponding to the override are applied as soon as possible, not
+ * bound by per-frame synchronization. See ACAMERA_CONTROL_SETTINGS_OVERRIDE for the
+ * CaptureRequest keys for each override.</p>
+ * <p>OFF is always included in this list.</p>
+ *
+ * @see ACAMERA_CONTROL_SETTINGS_OVERRIDE
+ */
+ ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES = // int32[n]
+ ACAMERA_CONTROL_START + 50,
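
Editor's note: a hedged sketch of checking this characteristic before enabling the override; it assumes a valid ACameraManager* and camera id and is illustrative only.

    // Hedged sketch: look for the ZOOM override in the static characteristics.
    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    static bool supportsZoomOverride(ACameraManager* manager, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
            return false;
        }
        bool found = false;
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_CONTROL_AVAILABLE_SETTINGS_OVERRIDES, &entry) == ACAMERA_OK) {
            for (uint32_t i = 0; i < entry.count; i++) {
                if (entry.data.i32[i] == ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM) {
                    found = true;
                    break;
                }
            }
        }
        ACameraMetadata_free(chars);  // caller owns the characteristics
        return found;
    }
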
ACAMERA_CONTROL_END,
/**
@@ -3520,6 +3619,26 @@
*/
ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP = // int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
ACAMERA_REQUEST_START + 19,
+ /**
+ * <p>A list of all possible color space profiles supported by a camera device.</p>
+ *
+ * <p>Type: int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>A color space profile is a combination of a color space, an image format, and a dynamic range
+ * profile. If a camera does not support the
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT</a>
+ * capability, the dynamic range profile will always be
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/params/DynamicRangeProfiles.html#STANDARD">DynamicRangeProfiles#STANDARD</a>. Camera clients can
+ * use <a href="https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration.html#setColorSpace">SessionConfiguration#setColorSpace</a> to select
+ * a color space.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP = // int64[n*3] (acamera_metadata_enum_android_request_available_color_space_profiles_map_t)
+ ACAMERA_REQUEST_START + 21,
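
Editor's note: the map is a flat int64 array of (color space, image format, dynamic range profile) triples. A minimal sketch of walking it, assuming `chars` was obtained from ACameraManager_getCameraCharacteristics; the function name is illustrative.

    // Hedged sketch: iterate the color space profile triples.
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    static void dumpColorSpaceProfiles(const ACameraMetadata* chars) {
        ACameraMetadata_const_entry entry;
        if (ACameraMetadata_getConstEntry(chars,
                ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry) != ACAMERA_OK) {
            return;  // tag not present on this device
        }
        for (uint32_t i = 0; i + 2 < entry.count; i += 3) {
            int64_t colorSpace          = entry.data.i64[i];
            int64_t imageFormat         = entry.data.i64[i + 1];
            int64_t dynamicRangeProfile = entry.data.i64[i + 2];
            // e.g. compare colorSpace against
            // ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3, etc.
            (void)colorSpace; (void)imageFormat; (void)dynamicRangeProfile;
        }
    }
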
ACAMERA_REQUEST_END,
/**
@@ -8475,6 +8594,40 @@
} acamera_metadata_enum_android_control_extended_scene_mode_t;
+// ACAMERA_CONTROL_SETTINGS_OVERRIDE
+typedef enum acamera_metadata_enum_acamera_control_settings_override {
+ /**
+ * <p>No keys are applied sooner than the other keys when applying CaptureRequest
+ * settings to the camera device. This is the default value.</p>
+ */
+ ACAMERA_CONTROL_SETTINGS_OVERRIDE_OFF = 0,
+
+ /**
+ * <p>Zoom related keys are applied sooner than the other keys in the CaptureRequest. The
+ * zoom related keys are:</p>
+ * <ul>
+ * <li>ACAMERA_CONTROL_ZOOM_RATIO</li>
+ * <li>ACAMERA_SCALER_CROP_REGION</li>
+ * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+ * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+ * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+ * </ul>
+ * <p>Even though ACAMERA_CONTROL_AE_REGIONS, ACAMERA_CONTROL_AWB_REGIONS,
+ * and ACAMERA_CONTROL_AF_REGIONS are not directly zoom related, applications
+ * typically scale these regions together with ACAMERA_SCALER_CROP_REGION to have a
+ * consistent mapping within the current field of view. In this aspect, they are
+ * related to ACAMERA_SCALER_CROP_REGION and ACAMERA_CONTROL_ZOOM_RATIO.</p>
+ *
+ * @see ACAMERA_CONTROL_AE_REGIONS
+ * @see ACAMERA_CONTROL_AF_REGIONS
+ * @see ACAMERA_CONTROL_AWB_REGIONS
+ * @see ACAMERA_CONTROL_ZOOM_RATIO
+ * @see ACAMERA_SCALER_CROP_REGION
+ */
+ ACAMERA_CONTROL_SETTINGS_OVERRIDE_ZOOM = 1,
+
+} acamera_metadata_enum_android_control_settings_override_t;
+
// ACAMERA_EDGE_MODE
@@ -9448,6 +9601,99 @@
} acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
+// ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP
+typedef enum acamera_metadata_enum_acamera_request_available_color_space_profiles_map {
+ /**
+ * <p>Default value, when not explicitly specified. The Camera device will choose the color
+ * space to employ.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED = -1,
+
+ /**
+ * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB = 0,
+
+ /**
+ * <p>RGB color space sRGB standardized as IEC 61966-2.1:1999.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_SRGB = 1,
+
+ /**
+ * <p>RGB color space scRGB-nl standardized as IEC 61966-2-2:2003.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_EXTENDED_SRGB = 2,
+
+ /**
+ * <p>RGB color space scRGB standardized as IEC 61966-2-2:2003.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_EXTENDED_SRGB
+ = 3,
+
+ /**
+ * <p>RGB color space BT.709 standardized as Rec. ITU-R BT.709-5.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT709 = 4,
+
+ /**
+ * <p>RGB color space BT.2020 standardized as Rec. ITU-R BT.2020-1.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020 = 5,
+
+ /**
+ * <p>RGB color space DCI-P3 standardized as SMPTE RP 431-2-2007.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DCI_P3 = 6,
+
+ /**
+ * <p>RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3 = 7,
+
+ /**
+ * <p>RGB color space NTSC, 1953 standard.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_NTSC_1953 = 8,
+
+ /**
+ * <p>RGB color space SMPTE C.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SMPTE_C = 9,
+
+ /**
+ * <p>RGB color space Adobe RGB (1998).</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ADOBE_RGB = 10,
+
+ /**
+ * <p>RGB color space ProPhoto RGB standardized as ROMM RGB ISO 22028-2:2013.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_PRO_PHOTO_RGB = 11,
+
+ /**
+ * <p>RGB color space ACES standardized as SMPTE ST 2065-1:2012.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACES = 12,
+
+ /**
+ * <p>RGB color space ACEScg standardized as Academy S-2014-004.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACESCG = 13,
+
+ /**
+ * <p>XYZ color space CIE XYZ. This color space assumes standard illuminant D50 as its white
+ * point.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_XYZ = 14,
+
+ /**
+ * <p>Lab color space CIE L*a*b*. This color space uses CIE XYZ D50 as a profile conversion
+ * space.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_LAB = 15,
+
+} acamera_metadata_enum_android_request_available_color_space_profiles_map_t;
+
// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index 3e7c415..bae8706 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -13,6 +13,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_camera_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_camera_license"],
+}
+
cc_defaults {
name: "camera_defaults",
static_libs: [
diff --git a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
index 1e94d47..5866aaf 100644
--- a/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_SessionStats_fuzzer.cpp
@@ -79,10 +79,14 @@
if (fdp.ConsumeBool()) {
parcelCamStreamStats.writeInt32(streamUseCase);
}
+ int32_t colorSpace = fdp.ConsumeIntegral<int32_t>();
+ if (fdp.ConsumeBool()) {
+ parcelCamStreamStats.writeInt32(colorSpace);
+ }
cameraStreamStats = new CameraStreamStats(width, height, format, maxPreviewFps, dataSpace,
usage, maxHalBuffers, maxAppBuffers,
- dynamicRangeProfile, streamUseCase);
+ dynamicRangeProfile, streamUseCase, colorSpace);
}
parcelCamStreamStats.setDataPosition(0);
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index dd101e2..d757cd6 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -419,30 +419,39 @@
/*
* Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
*
- * The metadata (version 1) is written as a binary array with the following format:
+ * The metadata (version 2) is written as a binary array with the following format:
* - winscope magic string (#VV1NSC0PET1ME2#, 16B).
- * - the metadata version number (4B).
- * - Realtime-to-monotonic time offset in nanoseconds (8B).
- * - the recorded frames count (8B)
+ * - the metadata version number (4B little endian).
+ * - Realtime-to-elapsed time offset in nanoseconds (8B little endian).
+ * - the recorded frames count (8B little endian)
* - for each recorded frame:
- * - System time in monotonic clock timebase in nanoseconds (8B).
+ * - System time in elapsed clock timebase in nanoseconds (8B little endian).
*
- * All numbers are Little Endian encoded.
+ *
+ * Metadata version 2 changes:
+ *
+ * Use elapsed time for compatibility with other UI traces (most of them):
+ * - Realtime-to-elapsed time offset (instead of realtime-to-monotonic)
+ * - Frame timestamps in elapsed clock timebase (instead of monotonic)
*/
static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
ALOGV("Writing winscope metadata");
static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
- static constexpr std::uint32_t metadataVersion = 1;
- const std::int64_t realToMonotonicTimeOffsetNs =
- systemTime(SYSTEM_TIME_REALTIME) - systemTime(SYSTEM_TIME_MONOTONIC);
+ static constexpr std::uint32_t metadataVersion = 2;
+
+ const auto elapsedTimeNs = android::elapsedRealtimeNano();
+ const std::int64_t elapsedToMonotonicTimeOffsetNs =
+ elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
+ const std::int64_t realToElapsedTimeOffsetNs =
+ systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());
sp<ABuffer> buffer = new ABuffer(
kWinscopeMagicString.size() +
sizeof(decltype(metadataVersion)) +
- sizeof(decltype(realToMonotonicTimeOffsetNs)) +
+ sizeof(decltype(realToElapsedTimeOffsetNs)) +
sizeof(decltype(framesCount)) +
framesCount * sizeof(std::uint64_t)
);
@@ -454,14 +463,16 @@
writeValueLE(metadataVersion, pos);
pos += sizeof(decltype(metadataVersion));
- writeValueLE(realToMonotonicTimeOffsetNs, pos);
- pos += sizeof(decltype(realToMonotonicTimeOffsetNs));
+ writeValueLE(realToElapsedTimeOffsetNs, pos);
+ pos += sizeof(decltype(realToElapsedTimeOffsetNs));
writeValueLE(framesCount, pos);
pos += sizeof(decltype(framesCount));
for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
- writeValueLE<std::uint64_t>(timestampMonotonicUs * 1000, pos);
+ const auto timestampElapsedNs =
+ elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
+ writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
pos += sizeof(std::uint64_t);
}
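
Editor's note: a hedged reader-side sketch of the version-2 layout documented in the comment above. The struct and function names are illustrative, and the direct memcpy decode assumes a little-endian host; none of this is part of screenrecord itself.

    // Hedged sketch of a Winscope metadata v2 reader: magic (16B), version
    // (u32 LE), realtime-to-elapsed offset (i64 LE), frame count (u32 LE),
    // then one u64 LE elapsed-time timestamp per frame.
    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct WinscopeMetadataV2 {
        int64_t realToElapsedTimeOffsetNs;
        std::vector<uint64_t> frameTimestampsElapsedNs;
    };

    static bool parseWinscopeMetadataV2(const uint8_t* data, size_t size,
                                        WinscopeMetadataV2* out) {
        static constexpr char kMagic[] = "#VV1NSC0PET1ME2#";
        constexpr size_t kMagicLen = 16;
        if (size < kMagicLen + 4 + 8 + 4 || memcmp(data, kMagic, kMagicLen) != 0) return false;
        size_t pos = kMagicLen;
        uint32_t version;
        memcpy(&version, data + pos, sizeof(version));  // little-endian host assumed
        pos += sizeof(version);
        if (version != 2) return false;
        memcpy(&out->realToElapsedTimeOffsetNs, data + pos, sizeof(int64_t));
        pos += sizeof(int64_t);
        uint32_t framesCount;
        memcpy(&framesCount, data + pos, sizeof(framesCount));
        pos += sizeof(framesCount);
        if (size - pos < framesCount * sizeof(uint64_t)) return false;
        out->frameTimestampsElapsedNs.resize(framesCount);
        memcpy(out->frameTimestampsElapsedNs.data(), data + pos,
               framesCount * sizeof(uint64_t));
        return true;
    }
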
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 953afc5..96a4c4a 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -671,6 +671,9 @@
void C2SoftAvcDec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftAvcDec::deleteDecoder() {
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index a27c218..15d6dcd 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -664,6 +664,9 @@
void C2SoftHevcDec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftHevcDec::deleteDecoder() {
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 9a41910..439323c 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -732,6 +732,9 @@
void C2SoftMpeg2Dec::resetPlugin() {
mSignalledOutputEos = false;
mTimeStart = mTimeEnd = systemTime();
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
}
status_t C2SoftMpeg2Dec::deleteDecoder() {
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 54a1d0e..3bf9c48 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -256,7 +256,9 @@
mFramesConfigured = false;
mSignalledOutputEos = false;
mSignalledError = false;
-
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
return C2_OK;
}
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 69e3a5e..e0c87f7 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -1,10 +1,21 @@
# Bug component: 1344
# go/android-fwk-media-solutions for info on areas of ownership.
-# Main owners:
+# MediaRouter and native mirroring only:
adadukin@google.com
aquilescanta@google.com
+bishoygendy@google.com
ivanbuper@google.com
-# In case of emergency:
-andrewlewis@google.com #{LAST_RESORT_SUGGESTION}
+# MediaMuxer, MediaRecorder, and seamless transcoding only:
+andrewlewis@google.com
+claincly@google.com
+
+# Everything in go/android-fwk-media-solutions not covered above:
+bachinger@google.com
+christosts@google.com
+ibaker@google.com
+michaelkatz@google.com
+rohks@google.com
+tianyifeng@google.com
+tonihei@google.com
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index a727db2..abfd8a7 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -887,6 +887,12 @@
* will be respected if both this function and {@link AAudioStreamBuilder_setChannelMask} are
* called.
*
+ * Note that if the channel count is two then it may get mixed to mono when the device only supports
+ * one channel. If the channel count is greater than two but the device's supported channel count is
+ * less than the requested value, the channels higher than the device channel will be dropped. If
+ * higher channels should be mixed or spatialized, use {@link AAudioStreamBuilder_setChannelMask}
+ * instead.
+ *
* Available since API level 26.
*
* @param builder reference provided by AAudio_createStreamBuilder()
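
Editor's note: a small sketch contrasting the two builder paths the note above describes. The helper name and error handling are illustrative, and AAudioStreamBuilder_setChannelMask is assumed to be available (API level 32+).

    // Hedged sketch: channel count vs. channel mask on the AAudio builder.
    #include <aaudio/AAudio.h>

    static AAudioStream* openStereoOutput(bool preferMask) {
        AAudioStreamBuilder* builder = nullptr;
        if (AAudio_createStreamBuilder(&builder) != AAUDIO_OK) return nullptr;
        if (preferMask) {
            // Explicit mask: lets the framework mix or spatialize when the
            // device exposes fewer channels than requested.
            AAudioStreamBuilder_setChannelMask(builder, AAUDIO_CHANNEL_STEREO);
        } else {
            // Plain count: per the note above, channels beyond the device's
            // channel count may simply be dropped.
            AAudioStreamBuilder_setChannelCount(builder, 2);
        }
        AAudioStream* stream = nullptr;
        AAudioStreamBuilder_openStream(builder, &stream);
        AAudioStreamBuilder_delete(builder);
        return stream;  // may be nullptr if open failed
    }
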
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index 44f0f50..850eb34 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -200,7 +200,7 @@
status_t AudioPlayback::waitForConsumption(bool testSeek) {
if (PLAY_STARTED != mState) return INVALID_OPERATION;
// in static buffer mode, lets not play clips with duration > 30 sec
- int retry = 30;
+ int retry = 300;
// Total number of frames in the input file.
size_t totalFrameCount = mMemCapacity / mTrack->frameSize();
while (!mStopPlaying && retry > 0) {
@@ -227,7 +227,7 @@
if (bufferPosition != setPosition) return BAD_VALUE;
mTrack->start();
}
- std::this_thread::sleep_for(std::chrono::milliseconds(300));
+ std::this_thread::sleep_for(std::chrono::milliseconds(100));
retry--;
}
if (!mStopPlaying) return TIMED_OUT;
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 445633b..4bd81c8 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -26,31 +26,18 @@
// UNIT TEST
TEST(AudioTrackTest, TestPerformanceMode) {
- std::vector<std::string> attachedDevices;
- std::vector<MixPort> mixPorts;
- std::vector<Route> routes;
- EXPECT_EQ(OK, parse_audio_policy_configuration_xml(attachedDevices, mixPorts, routes));
- std::string output_flags_string[] = {"AUDIO_OUTPUT_FLAG_FAST", "AUDIO_OUTPUT_FLAG_DEEP_BUFFER"};
+ std::vector<struct audio_port_v7> ports;
+ ASSERT_EQ(OK, listAudioPorts(ports));
audio_output_flags_t output_flags[] = {AUDIO_OUTPUT_FLAG_FAST, AUDIO_OUTPUT_FLAG_DEEP_BUFFER};
audio_flags_mask_t flags[] = {AUDIO_FLAG_LOW_LATENCY, AUDIO_FLAG_DEEP_BUFFER};
bool hasFlag = false;
for (int i = 0; i < sizeof(flags) / sizeof(flags[0]); i++) {
hasFlag = false;
- for (int j = 0; j < mixPorts.size() && !hasFlag; j++) {
- MixPort port = mixPorts[j];
- if (port.role == "source" && port.flags.find(output_flags_string[i]) != -1) {
- for (int k = 0; k < routes.size() && !hasFlag; k++) {
- if (routes[k].sources.find(port.name) != -1 &&
- std::find(attachedDevices.begin(), attachedDevices.end(), routes[k].sink) !=
- attachedDevices.end()) {
- hasFlag = true;
- std::cerr << "found port with flag " << output_flags_string[i] << "@ "
- << " port :: name : " << port.name << " role : " << port.role
- << " port :: flags : " << port.flags
- << " connected via route name : " << routes[k].name
- << " route sources : " << routes[k].sources
- << " route sink : " << routes[k].sink << std::endl;
- }
+ for (const auto& port : ports) {
+ if (port.role == AUDIO_PORT_ROLE_SOURCE && port.type == AUDIO_PORT_TYPE_MIX) {
+ if ((port.active_config.flags.output & output_flags[i]) != 0) {
+ hasFlag = true;
+ break;
}
}
}
diff --git a/media/libaudiohal/impl/EffectHalHidl.cpp b/media/libaudiohal/impl/EffectHalHidl.cpp
index 8743c04..3956a6c 100644
--- a/media/libaudiohal/impl/EffectHalHidl.cpp
+++ b/media/libaudiohal/impl/EffectHalHidl.cpp
@@ -17,11 +17,16 @@
#define LOG_TAG "EffectHalHidl"
//#define LOG_NDEBUG 0
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <android-base/stringprintf.h>
#include <common/all-versions/VersionUtils.h>
#include <cutils/native_handle.h>
+#include <cutils/properties.h>
#include <hwbinder/IPCThreadState.h>
#include <media/EffectsFactoryApi.h>
+#include <mediautils/SchedulingPolicyService.h>
#include <mediautils/TimeCheck.h>
+#include <system/audio_effects/effect_spatializer.h>
#include <utils/Log.h>
#include <util/EffectUtils.h>
@@ -50,6 +55,18 @@
effect_descriptor_t halDescriptor{};
if (EffectHalHidl::getDescriptor(&halDescriptor) == NO_ERROR) {
mIsInput = (halDescriptor.flags & EFFECT_FLAG_TYPE_PRE_PROC) == EFFECT_FLAG_TYPE_PRE_PROC;
+ const bool isSpatializer =
+ memcmp(&halDescriptor.type, FX_IID_SPATIALIZER, sizeof(effect_uuid_t)) == 0;
+ if (isSpatializer) {
+ constexpr int32_t kRTPriorityMin = 1;
+ constexpr int32_t kRTPriorityMax = 3;
+ const int32_t priorityBoost = property_get_int32("audio.spatializer.priority", 1);
+ if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+ ALOGD("%s: audio.spatializer.priority %d on effect %lld",
+ __func__, priorityBoost, (long long)effectId);
+ mHalThreadPriority = priorityBoost;
+ }
+ }
}
}
@@ -127,6 +144,8 @@
ALOGE_IF(!mEfGroup, "Event flag creation for effects failed");
return NO_INIT;
}
+
+ (void)checkHalThreadPriority();
mStatusMQ = std::move(tempStatusMQ);
return OK;
}
@@ -317,5 +336,67 @@
return result;
}
+status_t EffectHalHidl::getHalPid(pid_t *pid) const {
+ using ::android::hidl::base::V1_0::DebugInfo;
+ using ::android::hidl::manager::V1_0::IServiceManager;
+ DebugInfo debugInfo;
+ const auto ret = mEffect->getDebugInfo([&] (const auto &info) {
+ debugInfo = info;
+ });
+ if (!ret.isOk()) {
+ ALOGW("%s: cannot get effect debug info", __func__);
+ return INVALID_OPERATION;
+ }
+ if (debugInfo.pid != (int)IServiceManager::PidConstant::NO_PID) {
+ *pid = debugInfo.pid;
+ return NO_ERROR;
+ }
+ ALOGW("%s: effect debug info does not contain pid", __func__);
+ return NAME_NOT_FOUND;
+}
+
+status_t EffectHalHidl::getHalWorkerTid(pid_t *tid) {
+ int32_t reply = -1;
+ uint32_t replySize = sizeof(reply);
+ const status_t status =
+ command('gtid', 0 /* cmdSize */, nullptr /* pCmdData */, &replySize, &reply);
+ if (status == OK) {
+ *tid = (pid_t)reply;
+ } else {
+ ALOGW("%s: failed with status:%d", __func__, status);
+ }
+ return status;
+}
+
+bool EffectHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
+ if (mHalThreadPriority == kRTPriorityDisabled) {
+ return true;
+ }
+ const int err = requestPriority(
+ threadPid, threadId,
+ mHalThreadPriority, false /*isForApp*/, true /*asynchronous*/);
+ ALOGW_IF(err, "%s: failed to set RT priority %d for pid %d tid %d; error %d",
+ __func__, mHalThreadPriority, threadPid, threadId, err);
+ // Audio will still work, but may be more susceptible to glitches.
+ return err == 0;
+}
+
+status_t EffectHalHidl::checkHalThreadPriority() {
+ if (mHalThreadPriority == kRTPriorityDisabled) return OK;
+ if (mHalThreadPriority < kRTPriorityMin
+ || mHalThreadPriority > kRTPriorityMax) return BAD_VALUE;
+
+ pid_t halPid, halWorkerTid;
+ const status_t status = getHalPid(&halPid) ?: getHalWorkerTid(&halWorkerTid);
+ const bool success = status == OK && requestHalThreadPriority(halPid, halWorkerTid);
+ ALOGD("%s: effectId %lld RT priority(%d) request %s%s",
+ __func__, (long long)mEffectId, mHalThreadPriority,
+ success ? "succeeded" : "failed",
+ status == OK
+ ? base::StringPrintf(" for pid:%d tid:%d", halPid, halWorkerTid).c_str()
+ : " (pid / tid cannot be read)");
+ return success ? OK : status != OK ? status : INVALID_OPERATION /* request failed */;
+}
+
} // namespace effect
} // namespace android
diff --git a/media/libaudiohal/impl/EffectHalHidl.h b/media/libaudiohal/impl/EffectHalHidl.h
index e139768..94dcd7e 100644
--- a/media/libaudiohal/impl/EffectHalHidl.h
+++ b/media/libaudiohal/impl/EffectHalHidl.h
@@ -78,6 +78,11 @@
std::unique_ptr<StatusMQ> mStatusMQ;
EventFlag* mEfGroup;
bool mIsInput = false;
+ static constexpr int32_t kRTPriorityMin = 1;
+ static constexpr int32_t kRTPriorityMax = 3;
+ static constexpr int kRTPriorityDisabled = 0;
+ // Typical RealTime mHalThreadPriority ranges from 1 (low) to 3 (high).
+ int mHalThreadPriority = kRTPriorityDisabled;
// Can not be constructed directly by clients.
EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId);
@@ -93,6 +98,10 @@
uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
uint32_t *replySize, void *pReplyData);
status_t setProcessBuffers();
+ status_t getHalPid(pid_t *pid) const;
+ status_t getHalWorkerTid(pid_t *tid);
+ bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
+ status_t checkHalThreadPriority();
};
} // namespace effect
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index 03ce329..ba511fe 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -46,10 +46,9 @@
shared_libs: [
"libaudioutils",
"libbase",
- "libbinder",
"liblog",
"libutils",
- "libvibrator",
+ "libvibratorutils",
],
relative_install_path: "soundfx",
diff --git a/media/libmedia/xsd/api/current.txt b/media/libmedia/xsd/api/current.txt
index 73b5f8d..35aa213 100644
--- a/media/libmedia/xsd/api/current.txt
+++ b/media/libmedia/xsd/api/current.txt
@@ -47,7 +47,9 @@
method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile_optional();
method public java.util.List<media.profiles.CamcorderProfiles.ImageDecodingOptional> getImageDecoding_optional();
method public java.util.List<media.profiles.CamcorderProfiles.ImageEncodingOptional> getImageEncoding_optional();
+ method public int getStartOffsetMs();
method public void setCameraId(int);
+ method public void setStartOffsetMs(int);
}
public static class CamcorderProfiles.ImageDecodingOptional {
diff --git a/media/libmedia/xsd/media_profiles.xsd b/media/libmedia/xsd/media_profiles.xsd
index 9664456..dcc3028 100644
--- a/media/libmedia/xsd/media_profiles.xsd
+++ b/media/libmedia/xsd/media_profiles.xsd
@@ -49,6 +49,7 @@
</xs:element>
</xs:choice>
<xs:attribute name="cameraId" type="xs:int"/>
+ <xs:attribute name="startOffsetMs" type="xs:int"/>
</xs:complexType>
<xs:complexType name="EncoderProfile">
<xs:sequence>
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index f3cbdb0..bdf1cbc 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -2699,7 +2699,7 @@
// This is a benign busy-wait, with the next data request generated 10 ms or more later;
// nevertheless for power reasons, we don't want to see too many of these.
- ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned");
+ ALOGV_IF(actualSize == 0 && buffer.size() > 0, "callbackwrapper: empty buffer returned");
unlock();
return actualSize;
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 6ef52c7..ad6e36a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -3355,8 +3355,8 @@
MediaCodecInfo::Attributes attr = mCodecInfo
? mCodecInfo->getAttributes()
: MediaCodecInfo::Attributes(0);
- if (!(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
- // software codec is currently ignored.
+ if (mDomain == DOMAIN_VIDEO || !(attr & MediaCodecInfo::kFlagIsSoftwareOnly)) {
+ // software audio codecs are currently ignored.
mResourceManagerProxy->addResource(MediaResource::CodecResource(
mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
}
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 5e16b34..78b7288 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -354,8 +354,19 @@
//static
void MediaCodecList::findMatchingCodecs(
- const char *mime, bool encoder, uint32_t flags, sp<AMessage> format,
+ const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
Vector<AString> *matches) {
+ findMatchingCodecs(mime, encoder, flags, format, matches, /* checkProfile= */ true);
+ if (matches->empty()) {
+ ALOGV("no matching codec found, retrying without profile check");
+ findMatchingCodecs(mime, encoder, flags, format, matches, /* checkProfile= */ false);
+ }
+}
+
+//static
+void MediaCodecList::findMatchingCodecs(
+ const char *mime, bool encoder, uint32_t flags, const sp<AMessage> &format,
+ Vector<AString> *matches, bool checkProfile) {
matches->clear();
const sp<IMediaCodecList> list = getInstance();
@@ -379,7 +390,7 @@
AString componentName = info->getCodecName();
- if (!codecHandlesFormat(mime, info, format)) {
+ if (!codecHandlesFormat(mime, info, format, checkProfile)) {
ALOGV("skipping codec '%s' which doesn't satisfy format %s",
componentName.c_str(), format->debugString(2).c_str());
continue;
@@ -400,9 +411,10 @@
}
}
-/*static*/
-bool MediaCodecList::codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info,
- sp<AMessage> format) {
+// static
+bool MediaCodecList::codecHandlesFormat(
+ const char *mime, const sp<MediaCodecInfo> &info, const sp<AMessage> &format,
+ bool checkProfile) {
if (format == nullptr) {
ALOGD("codecHandlesFormat: no format, so no extra checks");
@@ -510,7 +522,7 @@
}
int32_t profile = -1;
- if (format->findInt32("profile", &profile)) {
+ if (checkProfile && format->findInt32("profile", &profile)) {
Vector<MediaCodecInfo::ProfileLevel> profileLevels;
capabilities->getSupportedProfileLevels(&profileLevels);
auto it = profileLevels.begin();
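
Editor's note: the effect of the two-pass lookup added above, seen from the caller's side. A hedged sketch; the "mime"/"profile" format keys are the standard AMessage entries, and the chosen profile value is just an example.

    // Hedged sketch: callers keep using the public findMatchingCodecs() overload.
    // If no codec advertises the requested profile, the new fallback retries the
    // match with the profile check disabled instead of returning an empty list.
    #include <media/stagefright/MediaCodecList.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static void findHevcDecoders(Vector<AString>* matches) {
        sp<AMessage> format = new AMessage;
        format->setString("mime", "video/hevc");
        format->setInt32("profile", 2);  // e.g. a 10-bit profile the device may lack
        MediaCodecList::findMatchingCodecs(
                "video/hevc", false /* encoder */, 0 /* flags */, format, matches);
        // matches is non-empty if any HEVC decoder exists, even when none
        // advertises the requested profile.
    }
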
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index fd30b35..c5b5199 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -796,6 +796,8 @@
{ "valid-samples", kKeyValidSamples },
{ "dvb-component-tag", kKeyDvbComponentTag},
{ "dvb-audio-description", kKeyDvbAudioDescription},
+ { "dvb-teletext-magazine-number", kKeyDvbTeletextMagazineNumber},
+ { "dvb-teletext-page-number", kKeyDvbTeletextPageNumber},
}
};
@@ -1014,6 +1016,16 @@
msg->setInt32("dvb-audio-description", dvbAudioDescription);
}
+ int32_t dvbTeletextMagazineNumber = 0;
+ if (meta->findInt32(kKeyDvbTeletextMagazineNumber, &dvbTeletextMagazineNumber)) {
+ msg->setInt32("dvb-teletext-magazine-number", dvbTeletextMagazineNumber);
+ }
+
+ int32_t dvbTeletextPageNumber = 0;
+ if (meta->findInt32(kKeyDvbTeletextPageNumber, &dvbTeletextPageNumber)) {
+ msg->setInt32("dvb-teletext-page-number", dvbTeletextPageNumber);
+ }
+
const char *lang;
if (meta->findCString(kKeyMediaLanguage, &lang)) {
msg->setString("language", lang);
@@ -1810,6 +1822,16 @@
meta->setInt32(kKeyDvbAudioDescription, dvbAudioDescription);
}
+ int32_t dvbTeletextMagazineNumber = 0;
+ if (msg->findInt32("dvb-teletext-magazine-number", &dvbTeletextMagazineNumber)) {
+ meta->setInt32(kKeyDvbTeletextMagazineNumber, dvbTeletextMagazineNumber);
+ }
+
+ int32_t dvbTeletextPageNumber = 0;
+ if (msg->findInt32("dvb-teletext-page-number", &dvbTeletextPageNumber)) {
+ meta->setInt32(kKeyDvbTeletextPageNumber, dvbTeletextPageNumber);
+ }
+
AString lang;
if (msg->findString("language", &lang)) {
meta->setCString(kKeyMediaLanguage, lang.c_str());
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecList.h b/media/libstagefright/include/media/stagefright/MediaCodecList.h
index 3cf455c..56c6a45 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecList.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecList.h
@@ -80,11 +80,9 @@
const char *mime,
bool createEncoder,
uint32_t flags,
- sp<AMessage> format,
+ const sp<AMessage> &format,
Vector<AString> *matchingCodecs);
- static bool codecHandlesFormat(const char *mime, sp<MediaCodecInfo> info, sp<AMessage> format);
-
static bool isSoftwareCodec(const AString &componentName);
private:
@@ -115,6 +113,20 @@
MediaCodecList(const MediaCodecList&) = delete;
MediaCodecList& operator=(const MediaCodecList&) = delete;
+
+ static void findMatchingCodecs(
+ const char *mime,
+ bool createEncoder,
+ uint32_t flags,
+ const sp<AMessage> &format,
+ Vector<AString> *matchingCodecs,
+ bool checkProfile);
+
+ static bool codecHandlesFormat(
+ const char *mime,
+ const sp<MediaCodecInfo> &info,
+ const sp<AMessage> &format,
+ bool checkProfile);
};
} // namespace android
diff --git a/media/libstagefright/include/media/stagefright/MediaMuxer.h b/media/libstagefright/include/media/stagefright/MediaMuxer.h
index 6dc70bf..33aaf11 100644
--- a/media/libstagefright/include/media/stagefright/MediaMuxer.h
+++ b/media/libstagefright/include/media/stagefright/MediaMuxer.h
@@ -56,10 +56,6 @@
*/
static MediaMuxer* create(int fd, OutputFormat format);
- // Construct the muxer with the file descriptor. Note that the MediaMuxer
- // will close this file at stop().
- MediaMuxer(int fd, OutputFormat format);
-
virtual ~MediaMuxer();
/**
@@ -135,6 +131,11 @@
sp<AMessage> getTrackFormat(size_t idx);
private:
+ // Construct the muxer with the file descriptor. Note that the MediaMuxer
+ // will close this file at stop().
+ // This constructor is made private to ensure that MediaMuxer::create() is used instead.
+ MediaMuxer(int fd, OutputFormat format);
+
const OutputFormat mFormat;
sp<MediaWriter> mWriter;
Vector< sp<MediaAdapter> > mTrackList; // Each track has its MediaAdapter.
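
Editor's note: since the constructor is now private, creation goes through the factory. A hedged sketch of the intended call pattern; the helper name, fd handling, and format contents are assumptions.

    // Hedged sketch: MediaMuxer::create() replaces direct construction and
    // returns nullptr on invalid arguments, so the result must be checked.
    #include <media/stagefright/MediaMuxer.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static MediaMuxer* makeMp4Muxer(int fd, const sp<AMessage>& videoFormat) {
        MediaMuxer* muxer = MediaMuxer::create(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
        if (muxer == nullptr) {
            return nullptr;
        }
        ssize_t trackIndex = muxer->addTrack(videoFormat);
        if (trackIndex < 0 || muxer->start() != OK) {
            delete muxer;
            return nullptr;
        }
        return muxer;  // caller later calls writeSampleData()/stop(); stop() closes fd
    }
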
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index cdf8d35..33f224c 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -283,6 +283,12 @@
// DVB audio description
kKeyDvbAudioDescription = 'addt', // bool (int32_t), DVB audio description only defined for
// audio component
+
+ // DVB teletext magazine number
+ kKeyDvbTeletextMagazineNumber = 'ttxm', // int32_t, DVB teletext magazine number
+
+ // DVB teletext page number
+ kKeyDvbTeletextPageNumber = 'ttxp', // int32_t, DVB teletext page number
};
enum {
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index ddf797c..88f7be7 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -332,6 +332,11 @@
}
bool AAVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
+ if (buffer->size() == 0) {
+ ALOGE("b/230630526 buffer->size() == 0");
+ android_errorWriteLog(0x534e4554, "230630526");
+ return false;
+ }
const uint8_t *data = buffer->data();
unsigned nalType = data[0] & 0x1f;
if (!mFirstIFrameProvided && nalType < 0x5) {
@@ -624,8 +629,7 @@
int32_t firstSeqNo = buffer->int32Data();
// This only works for FU-A type & non-start sequence
- int32_t nalType = buffer->size() >= 1 ? buffer->data()[0] & 0x1f : -1;
- if (nalType != 28 || (buffer->size() >= 2 && buffer->data()[1] & 0x80)) {
+ if (buffer->size() < 2 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[1] & 0x80) {
return firstSeqNo;
}
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index bb42d1f..72dd981 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -629,13 +629,13 @@
int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
+ CHECK(!queue->empty());
// pick the first sequence number has the start bit.
sp<ABuffer> buffer = *(queue->begin());
int32_t firstSeqNo = buffer->int32Data();
// This only works for FU-A type & non-start sequence
- unsigned nalType = buffer->data()[0] & 0x1f;
- if (nalType != 28 || buffer->data()[2] & 0x80) {
+ if (buffer->size() < 3 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[2] & 0x80) {
return firstSeqNo;
}
@@ -645,7 +645,7 @@
if (rtpTime + jit >= play) {
break;
}
- if ((data[2] & 0x80)) {
+ if (it->size() >= 3 && (data[2] & 0x80)) {
const int32_t seqNo = it->int32Data();
ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
firstSeqNo = seqNo;
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index ed31c02..b230df5 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -64,6 +64,10 @@
if (untranslated.find(err) == untranslated.end()) {
ALOGE("untranslated sf error code: %d", err);
+ char err_as_string[32];
+ snprintf(err_as_string, sizeof(err_as_string), "%d", err);
+ android_errorWriteWithInfoLog(0x534e4554, "224869524", -1,
+ err_as_string, strlen(err_as_string));
untranslated.insert(err);
}
}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
index 21ba957..7fd2752 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/MuxerTest.java
@@ -60,14 +60,12 @@
private static final String mStatsFile =
mContext.getExternalFilesDir(null) + "/Muxer." + System.currentTimeMillis() + ".csv";
private static final String TAG = "MuxerTest";
- private static final Map<String, Integer> mMapFormat = new Hashtable<String, Integer>() {
- {
- put("mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
- put("webm", MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM);
- put("3gpp", MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP);
- put("ogg", MediaMuxer.OutputFormat.MUXER_OUTPUT_OGG);
- }
- };
+ private static final Map<String, Integer> mMapFormat = Map.of(
+ "mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4,
+ "webm", MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM,
+ "3gpp", MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP,
+ "ogg", MediaMuxer.OutputFormat.MUXER_OUTPUT_OGG);
+
private String mInputFileName;
private String mFormat;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index afaa012..968b8fb 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1078,8 +1078,6 @@
clientPid = callingPid;
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
}
- adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- adjAttributionSource);
audio_session_t sessionId = input.sessionId;
if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -2298,8 +2296,7 @@
__func__, callingUid, callingPid, currentPid);
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
}
- adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- adjAttributionSource);
+
// we don't yet support anything other than linear PCM
if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -3951,7 +3948,6 @@
adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
currentPid = callingPid;
}
- adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(adjAttributionSource);
ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 87eb1f5..17bb83b 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -7361,6 +7361,27 @@
if (status != INVALID_OPERATION) {
updateHalSupportedLatencyModes_l();
}
+
+ // update priority if specified.
+ constexpr int32_t kRTPriorityMin = 1;
+ constexpr int32_t kRTPriorityMax = 3;
+ const int32_t priorityBoost =
+ property_get_int32("audio.spatializer.priority", kRTPriorityMin);
+ if (priorityBoost >= kRTPriorityMin && priorityBoost <= kRTPriorityMax) {
+ const pid_t pid = getpid();
+ const pid_t tid = getTid();
+
+ if (tid == -1) {
+ // Unusual: PlaybackThread::onFirstRef() should set the threadLoop running.
+ ALOGW("%s: audio.spatializer.priority %d ignored, thread not running",
+ __func__, priorityBoost);
+ } else {
+ ALOGD("%s: audio.spatializer.priority %d, allowing real time for pid %d tid %d",
+ __func__, priorityBoost, pid, tid);
+ sendPrioConfigEvent_l(pid, tid, priorityBoost, false /*forApp*/);
+ stream()->setHalThreadPriority(priorityBoost);
+ }
+ }
}
status_t AudioFlinger::SpatializerThread::createAudioPatch_l(const struct audio_patch *patch,
@@ -8355,6 +8376,8 @@
audio_input_flags_t inputFlags = mInput->flags;
audio_input_flags_t requestedFlags = *flags;
uint32_t sampleRate;
+ AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ attributionSource);
lStatus = initCheck();
if (lStatus != NO_ERROR) {
@@ -8369,7 +8392,7 @@
}
if (maxSharedAudioHistoryMs != 0) {
- if (!captureHotwordAllowed(attributionSource)) {
+ if (!captureHotwordAllowed(checkedAttributionSource)) {
lStatus = PERMISSION_DENIED;
goto Exit;
}
@@ -8490,16 +8513,16 @@
Mutex::Autolock _l(mLock);
int32_t startFrames = -1;
if (!mSharedAudioPackageName.empty()
- && mSharedAudioPackageName == attributionSource.packageName
+ && mSharedAudioPackageName == checkedAttributionSource.packageName
&& mSharedAudioSessionId == sessionId
- && captureHotwordAllowed(attributionSource)) {
+ && captureHotwordAllowed(checkedAttributionSource)) {
startFrames = mSharedAudioStartFrames;
}
track = new RecordTrack(this, client, attr, sampleRate,
format, channelMask, frameCount,
nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
- attributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
+ checkedAttributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
startFrames);
lStatus = track->initCheck();
@@ -10287,28 +10310,22 @@
void AudioFlinger::MmapThread::checkInvalidTracks_l()
{
- std::vector<audio_port_handle_t> invalidPortIds;
+ sp<MmapStreamCallback> callback;
for (const sp<MmapTrack> &track : mActiveTracks) {
if (track->isInvalid()) {
- invalidPortIds.push_back(track->portId());
+ callback = mCallback.promote();
+ if (callback == nullptr && mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
+ ALOGW("Could not notify MMAP stream tear down: no onRoutingChanged callback!");
+ mNoCallbackWarningCount++;
+ }
+ break;
}
}
- if (invalidPortIds.empty()) {
- return;
+ if (callback != 0) {
+ mLock.unlock();
+ callback->onRoutingChanged(AUDIO_PORT_HANDLE_NONE);
+ mLock.lock();
}
- sp<MmapStreamCallback> callback = mCallback.promote();
- if (callback == nullptr) {
- if (mNoCallbackWarningCount < kMaxNoCallbackWarnings) {
- ALOGW("Could not notify MMAP stream tear down: no onTearDown callback!");
- mNoCallbackWarningCount++;
- }
- return;
- }
- mLock.unlock();
- for (const auto invalidPortId : invalidPortIds) {
- callback->onTearDown(invalidPortId);
- }
- mLock.lock();
}
void AudioFlinger::MmapThread::dumpInternals_l(int fd, const Vector<String16>& args __unused)
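For reference, a minimal standalone sketch of the property read added to the spatializer thread above: the boost is taken from audio.spatializer.priority and only honored when it falls in the [1, 3] real-time range. This is an illustrative helper, not code from the change; only <cutils/properties.h> is assumed and the helper name is made up.

    #include <cutils/properties.h>

    // Returns the requested boost when it is inside the allowed RT range,
    // or 0 to indicate that no priority request should be made.
    static int32_t readSpatializerPriorityBoost() {
        constexpr int32_t kRTPriorityMin = 1;
        constexpr int32_t kRTPriorityMax = 3;
        const int32_t boost =
                property_get_int32("audio.spatializer.priority", kRTPriorityMin);
        if (boost < kRTPriorityMin || boost > kRTPriorityMax) {
            return 0;  // out of range: caller skips the priority request
        }
        return boost;
    }
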
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 1f64f93..cfb296c 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -530,7 +530,10 @@
id, attr.flags);
return nullptr;
}
- return new OpPlayAudioMonitor(attributionSource, attr.usage, id);
+
+ AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+ attributionSource);
+ return new OpPlayAudioMonitor(checkedAttributionSource, attr.usage, id);
}
AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index af40d1f..d9b856b 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -741,6 +741,17 @@
msg->post();
}
+void Spatializer::resetEngineHeadPose_l() {
+ ALOGV("%s mEngine %p", __func__, mEngine.get());
+ if (mEngine == nullptr) {
+ return;
+ }
+ const std::vector<float> headToStage(6, 0.0);
+ setEffectParameter_l(SPATIALIZER_PARAM_HEAD_TO_STAGE, headToStage);
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{SpatializerHeadTrackingMode::DISABLED});
+}
+
void Spatializer::onHeadToStagePoseMsg(const std::vector<float>& headToStage) {
ALOGV("%s", __func__);
sp<media::ISpatializerHeadTrackingCallback> callback;
@@ -792,8 +803,12 @@
}
mActualHeadTrackingMode = spatializerMode;
if (mEngine != nullptr) {
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ if (spatializerMode == SpatializerHeadTrackingMode::DISABLED) {
+ resetEngineHeadPose_l();
+ } else {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{spatializerMode});
+ }
}
callback = mHeadTrackingCallback;
mLocalLog.log("%s: %s, spatializerMode %s", __func__, media::toString(mode).c_str(),
@@ -924,16 +939,25 @@
bool lowLatencySupported = mSupportedLatencyModes.empty()
|| (std::find(mSupportedLatencyModes.begin(), mSupportedLatencyModes.end(),
AUDIO_LATENCY_MODE_LOW) != mSupportedLatencyModes.end());
- if (mSupportsHeadTracking && mPoseController != nullptr) {
- if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
- && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
- && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
- mPoseController->setHeadSensor(mHeadSensor);
- mPoseController->setScreenSensor(mScreenSensor);
- requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+ if (mSupportsHeadTracking) {
+ if (mPoseController != nullptr) {
+ if (lowLatencySupported && mNumActiveTracks > 0 && mLevel != SpatializationLevel::NONE
+ && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
+ && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
+ if (mEngine != nullptr) {
+ setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
+ std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
+ }
+ mPoseController->setHeadSensor(mHeadSensor);
+ mPoseController->setScreenSensor(mScreenSensor);
+ requestedLatencyMode = AUDIO_LATENCY_MODE_LOW;
+ } else {
+ mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
+ mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+ resetEngineHeadPose_l();
+ }
} else {
- mPoseController->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
- mPoseController->setScreenSensor(SpatializerPoseController::INVALID_SENSOR);
+ resetEngineHeadPose_l();
}
}
if (mOutput != AUDIO_IO_HANDLE_NONE) {
@@ -947,8 +971,6 @@
mEngine->setEnabled(true);
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
std::vector<SpatializationLevel>{mLevel});
- setEffectParameter_l(SPATIALIZER_PARAM_HEADTRACKING_MODE,
- std::vector<SpatializerHeadTrackingMode>{mActualHeadTrackingMode});
} else {
setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
std::vector<SpatializationLevel>{SpatializationLevel::NONE});
@@ -970,6 +992,7 @@
mPoseController->setDisplayOrientation(mDisplayOrientation);
} else if (!isControllerNeeded && mPoseController != nullptr) {
mPoseController.reset();
+ resetEngineHeadPose_l();
}
if (mPoseController != nullptr) {
mPoseController->setDesiredMode(mDesiredHeadTrackingMode);
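For reference, the neutral head-to-stage pose sent by resetEngineHeadPose_l() above is a six-element zero vector, i.e. an identity transform (zero translation and a zero rotation vector); the engine is then switched to SpatializerHeadTrackingMode::DISABLED so stale sensor poses stop being applied. A trivial standalone sketch of that value (illustrative only):

    #include <vector>

    // Identity head-to-stage pose: all six components zero.
    static std::vector<float> neutralHeadToStagePose() {
        return std::vector<float>(6, 0.0f);
    }
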
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index 9eb8c48..ba60cae 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -351,6 +351,12 @@
*/
void checkEngineState_l() REQUIRES(mLock);
+ /**
+ * Reset the head tracking mode and recenter the pose in the engine. Called when head
+ * tracking is disabled.
+ */
+ void resetEngineHeadPose_l() REQUIRES(mLock);
+
/** Effect engine descriptor */
const effect_descriptor_t mEngineDescriptor;
/** Callback interface to parent audio policy service */
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 981c569..f4ac2a1 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -156,14 +156,14 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
"android.hardware.camera.device@3.4",
"android.hardware.camera.device@3.5",
"android.hardware.camera.device@3.6",
"android.hardware.camera.device@3.7",
- "android.hardware.camera.device-V1-ndk",
+ "android.hardware.camera.device-V2-ndk",
"media_permission-aidl-cpp",
],
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index ba26ac4..c49ecb2 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -884,6 +884,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -928,7 +929,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -975,7 +976,7 @@
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
/*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
- streamInfo.timestampBase, streamInfo.mirrorMode);
+ streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace);
}
if (err != OK) {
@@ -1027,6 +1028,7 @@
int width, height, format, surfaceType;
uint64_t consumerUsage;
android_dataspace dataSpace;
+ int32_t colorSpace;
status_t err;
binder::Status res;
@@ -1040,6 +1042,7 @@
surfaceType = outputConfiguration.getSurfaceType();
format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
+ colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
// Hardcode consumer usage flags: SurfaceView--0x900, SurfaceTexture--0x100.
consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
@@ -1089,7 +1092,8 @@
outputConfiguration.getDynamicRangeProfile(),
outputConfiguration.getStreamUseCase(),
outputConfiguration.getTimestampBase(),
- outputConfiguration.getMirrorMode()));
+ outputConfiguration.getMirrorMode(),
+ colorSpace));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
@@ -1280,6 +1284,7 @@
int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
int mirrorMode = outputConfiguration.getMirrorMode();
for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1288,7 +1293,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
@@ -1646,7 +1651,8 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
- int64_t streamUseCase= outputConfiguration.getStreamUseCase();
+ int32_t colorSpace = outputConfiguration.getColorSpace();
+ int64_t streamUseCase = outputConfiguration.getStreamUseCase();
int timestampBase = outputConfiguration.getTimestampBase();
int mirrorMode = outputConfiguration.getMirrorMode();
for (auto& bufferProducer : bufferProducers) {
@@ -1662,7 +1668,7 @@
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index 69514f3..89a2af8 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -192,7 +192,9 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -213,7 +215,9 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) = 0;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ = 0;
/**
* Create an input stream of width, height, and format.
@@ -235,11 +239,13 @@
bool dataSpaceOverridden;
android_dataspace originalDataSpace;
int64_t dynamicRangeProfile;
+ int32_t colorSpace;
StreamInfo() : width(0), height(0), format(0), formatOverridden(false), originalFormat(0),
dataSpace(HAL_DATASPACE_UNKNOWN), dataSpaceOverridden(false),
originalDataSpace(HAL_DATASPACE_UNKNOWN),
- dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD){}
+ dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
/**
* Check whether the format matches the current or the original one in case
* it got overridden.
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index ca75102..17a4a44 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -992,7 +992,7 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -1006,7 +1006,7 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
}
static bool isRawFormat(int format) {
@@ -1027,7 +1027,7 @@
const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
uint64_t consumerUsage, int64_t dynamicRangeProfile, int64_t streamUseCase,
- int timestampBase, int mirrorMode) {
+ int timestampBase, int mirrorMode, int32_t colorSpace) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1036,10 +1036,10 @@
ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
" consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
" dynamicRangeProfile 0x%" PRIx64 ", streamUseCase %" PRId64 ", timestampBase %d,"
- " mirrorMode %d",
+ " mirrorMode %d colorSpace %d",
mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
- dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode);
+ dynamicRangeProfile, streamUseCase, timestampBase, mirrorMode, colorSpace);
status_t res;
bool wasActive = false;
@@ -1110,7 +1110,7 @@
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1125,25 +1125,25 @@
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, transport, streamSetId,
isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
- timestampBase, mirrorMode);
+ timestampBase, mirrorMode, colorSpace);
}
size_t consumerCount = consumers.size();
@@ -1231,6 +1231,7 @@
streamInfo->dataSpaceOverridden = stream->isDataSpaceOverridden();
streamInfo->originalDataSpace = stream->getOriginalDataSpace();
streamInfo->dynamicRangeProfile = stream->getDynamicRangeProfile();
+ streamInfo->colorSpace = stream->getColorSpace();
return OK;
}
@@ -1880,7 +1881,8 @@
stream->getFormat(), streamMaxPreviewFps, stream->getDataSpace(), usage,
stream->getMaxHalBuffers(),
stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
- stream->getDynamicRangeProfile(), streamUseCase);
+ stream->getDynamicRangeProfile(), streamUseCase,
+ stream->getColorSpace());
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 80b1722..f5e167e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -151,7 +151,9 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -166,7 +168,9 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO) override;
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED)
+ override;
status_t createInputStream(
uint32_t width, uint32_t height, int format, bool isMultiResolution,
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index f594f84..a78d01e 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -35,11 +35,12 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile, int64_t streamUseCase,
- bool deviceTimeBaseIsRealtime, int timestampBase) :
+ bool deviceTimeBaseIsRealtime, int timestampBase, int32_t colorSpace) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase),
+ dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime, timestampBase,
+ colorSpace),
mTotalBufferCount(0),
mMaxCachedBufferCount(0),
mHandoutTotalBufferCount(0),
@@ -93,6 +94,7 @@
}
lines.appendFormat(" Dynamic Range Profile: 0x%" PRIx64 "\n",
camera_stream::dynamic_range_profile);
+ lines.appendFormat(" Color Space: %d\n", camera_stream::color_space);
lines.appendFormat(" Stream use case: %" PRId64 "\n", camera_stream::use_case);
lines.appendFormat(" Timestamp base: %d\n", getTimestampBase());
lines.appendFormat(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index ca1f238..6af0875 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -41,7 +41,8 @@
int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
- int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT);
+ int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
public:
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3ef29bc..1abcd86 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -56,12 +56,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -91,11 +91,11 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
setId, isMultiResolution, dynamicRangeProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase),
+ deviceTimeBaseIsRealtime, timestampBase, colorSpace),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -131,12 +131,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mConsumer(nullptr),
mTransform(0),
mTraceFirstBuffer(true),
@@ -180,13 +180,13 @@
int setId, bool isMultiResolution,
int64_t dynamicRangeProfile, int64_t streamUseCase,
bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
dynamicRangeProfile, streamUseCase, deviceTimeBaseIsRealtime,
- timestampBase),
+ timestampBase, colorSpace),
mTransform(0),
mTraceFirstBuffer(true),
mUseBufferManager(false),
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index db988a0..0d758bc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -96,7 +96,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
@@ -113,7 +114,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
* RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -129,7 +131,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
virtual ~Camera3OutputStream();
@@ -273,7 +276,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
/**
* Note that we release the lock briefly in this function
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 9215f23..da45227 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -35,12 +35,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
int setId, bool useHalBufManager, int64_t dynamicProfile,
int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
- int mirrorMode) :
+ int mirrorMode, int32_t colorSpace) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
transport, consumerUsage, timestampOffset, setId,
/*isMultiResolution*/false, dynamicProfile, streamUseCase,
- deviceTimeBaseIsRealtime, timestampBase, mirrorMode),
+ deviceTimeBaseIsRealtime, timestampBase, mirrorMode, colorSpace),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index aac3c2a..5167225 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -45,7 +45,8 @@
int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
bool deviceTimeBaseIsRealtime = false,
int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
- int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO);
+ int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
+ int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
virtual ~Camera3SharedOutputStream();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 88be9ff..4d8495f 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -55,7 +55,8 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase) :
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -95,6 +96,7 @@
camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
camera_stream::dynamic_range_profile = dynamicRangeProfile;
camera_stream::use_case = streamUseCase;
+ camera_stream::color_space = colorSpace;
if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
maxSize == 0) {
@@ -135,6 +137,10 @@
return camera_stream::data_space;
}
+int32_t Camera3Stream::getColorSpace() const {
+ return camera_stream::color_space;
+}
+
uint64_t Camera3Stream::getUsage() const {
return mUsage;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 214618a..f32053b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -167,6 +167,7 @@
uint32_t getHeight() const;
int getFormat() const;
android_dataspace getDataSpace() const;
+ int32_t getColorSpace() const;
uint64_t getUsage() const;
void setUsage(uint64_t usage);
void setFormatOverride(bool formatOverriden);
@@ -509,7 +510,8 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId, bool isMultiResolution, int64_t dynamicRangeProfile,
- int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase);
+ int64_t streamUseCase, bool deviceTimeBaseIsRealtime, int timestampBase,
+ int32_t colorSpace);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 6812e89..823be2e 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -67,6 +67,7 @@
std::unordered_set<int32_t> sensor_pixel_modes_used;
int64_t dynamic_range_profile;
int64_t use_case;
+ int32_t color_space;
} camera_stream_t;
typedef struct camera_stream_buffer {
@@ -114,20 +115,24 @@
int64_t streamUseCase;
int timestampBase;
int mirrorMode;
+ int32_t colorSpace;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0),
dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
timestampBase(OutputConfiguration::TIMESTAMP_BASE_DEFAULT),
- mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO) {}
+ mirrorMode(OutputConfiguration::MIRROR_MODE_AUTO),
+ colorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode) :
+ int64_t _dynamicRangeProfile, int _streamUseCase, int _timestampBase, int _mirrorMode,
+ int32_t _colorSpace) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
- streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode) {}
+ streamUseCase(_streamUseCase), timestampBase(_timestampBase), mirrorMode(_mirrorMode),
+ colorSpace(_colorSpace) {}
};
// Utility class to lock and unlock a GraphicBuffer
@@ -206,6 +211,7 @@
virtual int getFormat() const = 0;
virtual int64_t getDynamicRangeProfile() const = 0;
virtual android_dataspace getDataSpace() const = 0;
+ virtual int32_t getColorSpace() const = 0;
virtual void setFormatOverride(bool formatOverriden) = 0;
virtual bool isFormatOverridden() const = 0;
virtual int getOriginalFormat() const = 0;
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 4f4e581..99c067e 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -913,6 +913,7 @@
cam3stream->getOriginalFormat() : src->format);
dst.dataSpace = mapToAidlDataspace(cam3stream->isDataSpaceOverridden() ?
cam3stream->getOriginalDataSpace() : src->data_space);
+ dst.colorSpace = src->color_space;
dst.bufferSize = bufferSizes[i];
if (src->physical_camera_id != nullptr) {
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
index 74b3700..a071989 100644
--- a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -73,6 +73,10 @@
ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
} },
+ {34, {
+ ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
+ ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
+ } },
};
/**
@@ -89,4 +93,7 @@
ANDROID_SENSOR_PIXEL_MODE,
ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
} },
+ {34, {
+ ANDROID_CONTROL_SETTINGS_OVERRIDE,
+ } },
};
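For reference, these tables map an API level to the metadata tags introduced at that level; the entries added above register the Android 14 (API level 34) settings-override and color-space tags. A hedged sketch of the usual consumption pattern for such a table, not the service's actual filtering code (names are illustrative):

    #include <cstdint>
    #include <map>
    #include <vector>

    // Collect tags introduced after the vendor partition's API level so they
    // can be stripped from metadata handed to older vendor clients.
    static std::vector<uint32_t> tagsNewerThan(
            const std::map<int, std::vector<uint32_t>>& apiLevelToTags,
            int vendorApiLevel) {
        std::vector<uint32_t> newer;
        for (const auto& [level, tags] : apiLevelToTags) {
            if (level > vendorApiLevel) {
                newer.insert(newer.end(), tags.begin(), tags.end());
            }
        }
        return newer;
    }
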
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index d909624..b1bf41e 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -52,7 +52,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.3",
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 4d7798c..5e2a3fb 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -49,7 +49,7 @@
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
- "android.hardware.camera.provider-V1-ndk",
+ "android.hardware.camera.provider-V2-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index ed490a8..f9afd41 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -282,6 +282,61 @@
}
}
+bool deviceReportsColorSpaces(const CameraMetadata& staticInfo) {
+ camera_metadata_ro_entry_t entry = staticInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+ for (size_t i = 0; i < entry.count; ++i) {
+ uint8_t capability = entry.data.u8[i];
+ if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticInfo) {
+ int64_t colorSpace64 = colorSpace;
+ int64_t format64 = format;
+
+ // Translate HAL format + data space to public format
+ if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_V0_JFIF) {
+ format64 = 0x100; // JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) {
+ format64 = 0x48454946; // HEIC
+ } else if (format == HAL_PIXEL_FORMAT_BLOB
+ && dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_DYNAMIC_DEPTH)) {
+ format64 = 0x69656963; // DEPTH_JPEG
+ } else if (format == HAL_PIXEL_FORMAT_BLOB && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH_POINT_CLOUD, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_Y16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // DEPTH16, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW16 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH, not applicable
+ } else if (format == HAL_PIXEL_FORMAT_RAW10 && dataSpace == HAL_DATASPACE_DEPTH) {
+ return false; // RAW_DEPTH10, not applicable
+ }
+
+ camera_metadata_ro_entry_t entry =
+ staticInfo.find(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);
+ for (size_t i = 0; i < entry.count; i += 3) {
+ bool isFormatCompatible = (format64 == entry.data.i64[i + 1]);
+ bool isDynamicProfileCompatible =
+ (dynamicRangeProfile & entry.data.i64[i + 2]) != 0;
+
+ if (colorSpace64 == entry.data.i64[i]
+ && isFormatCompatible
+ && isDynamicProfileCompatible) {
+ return true;
+ }
+ }
+
+ ALOGE("Color space %d, image format %" PRId64 ", and dynamic range 0x%" PRIx64
+ " combination not found", colorSpace, format64, dynamicRangeProfile);
+ return false;
+}
+
bool isPublicFormat(int32_t format)
{
switch(format) {
@@ -336,7 +391,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode) {
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace) {
// bufferProducer must be non-null
if (gbp == nullptr) {
String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -450,6 +506,16 @@
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
+ if (colorSpace != ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED &&
+ SessionConfigurationUtils::deviceReportsColorSpaces(physicalCameraMetadata) &&
+ !SessionConfigurationUtils::isColorSpaceSupported(colorSpace, format, dataSpace,
+ dynamicRangeProfile, physicalCameraMetadata)) {
+ String8 msg = String8::format("Camera %s: Color space %d not supported, failed to "
+ "create output stream (pixel format %d dynamic range profile %" PRId64 ")",
+ logicalCameraId.string(), colorSpace, format, dynamicRangeProfile);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
physicalCameraMetadata)) {
String8 msg = String8::format("Camera %s: stream use case %" PRId64 " not supported,"
@@ -483,6 +549,7 @@
streamInfo.streamUseCase = streamUseCase;
streamInfo.timestampBase = timestampBase;
streamInfo.mirrorMode = mirrorMode;
+ streamInfo.colorSpace = colorSpace;
return binder::Status::ok();
}
if (width != streamInfo.width) {
@@ -538,6 +605,7 @@
camera3::Camera3OutputStream::applyZSLUsageQuirk(streamInfo.format, &u);
stream->usage = AidlCamera3Device::mapToAidlConsumerUsage(u);
stream->dataSpace = AidlCamera3Device::mapToAidlDataspace(streamInfo.dataSpace);
+ stream->colorSpace = streamInfo.colorSpace;
stream->rotation = AidlCamera3Device::mapToAidlStreamRotation(rotation);
stream->id = -1; // Invalid stream id
stream->physicalCameraId = std::string(physicalId.string());
@@ -635,6 +703,7 @@
String8 physicalCameraId = String8(it.getPhysicalCameraId());
int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
+ int32_t colorSpace = it.getColorSpace();
std::vector<int32_t> sensorPixelModesUsed = it.getSensorPixelModesUsed();
const CameraMetadata &physicalDeviceInfo = getMetadata(physicalCameraId,
overrideForPerfClass);
@@ -693,7 +762,7 @@
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
- streamUseCase, timestampBase, mirrorMode);
+ streamUseCase, timestampBase, mirrorMode, colorSpace);
if (!res.isOk())
return res;
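For reference, isColorSpaceSupported() above walks ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP as triples of (colorSpace, imageFormat, dynamicRangeProfileBitmap), accepting the request when the first two match and the third, treated as a bitmask of compatible dynamic range profiles, has the requested profile's bit set. A plain-C++ stand-in for that lookup, without the camera metadata types (illustrative only):

    #include <cstdint>
    #include <vector>

    static bool colorSpaceTripleSupported(const std::vector<int64_t>& profileMap,
                                          int64_t colorSpace, int64_t publicFormat,
                                          int64_t dynamicRangeProfile) {
        for (size_t i = 0; i + 2 < profileMap.size(); i += 3) {
            if (profileMap[i] == colorSpace &&
                profileMap[i + 1] == publicFormat &&
                (profileMap[i + 2] & dynamicRangeProfile) != 0) {
                return true;
            }
        }
        return false;
    }
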
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index a127c7b..264045e 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -98,7 +98,8 @@
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
- int64_t streamUseCase, int timestampBase, int mirrorMode);
+ int64_t streamUseCase, int timestampBase, int mirrorMode,
+ int32_t colorSpace);
//check if format is 10-bit output compatible
bool is10bitCompatibleFormat(int32_t format);
@@ -109,6 +110,11 @@
// Check if the device supports a given dynamicRangeProfile
bool isDynamicRangeProfileSupported(int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+bool deviceReportsColorSpaces(const CameraMetadata& staticMeta);
+
+bool isColorSpaceSupported(int32_t colorSpace, int32_t format, android_dataspace dataSpace,
+ int64_t dynamicRangeProfile, const CameraMetadata& staticMeta);
+
bool isStreamUseCaseSupported(int64_t streamUseCase, const CameraMetadata &deviceInfo);
void mapStreamInfo(const OutputStreamInfo &streamInfo,